var runtime.mheap_

231 uses

	runtime (current package)
		arena.go#L774: 		span = mheap_.allocUserArenaChunk()
		arena.go#L926: 		lock(&mheap_.lock)
		arena.go#L927: 		mheap_.userArena.quarantineList.insert(s)
		arena.go#L928: 		unlock(&mheap_.lock)
		heapdump.go#L454: 	for _, s := range mheap_.allspans {
		heapdump.go#L480: 	for _, s := range mheap_.allspans {
		heapdump.go#L517: 	for i1 := range mheap_.arenas {
		heapdump.go#L518: 		if mheap_.arenas[i1] == nil {
		heapdump.go#L521: 		for i, ha := range mheap_.arenas[i1] {
		heapdump.go#L652: 	for _, s := range mheap_.allspans {
		heapdump.go#L675: 	for _, s := range mheap_.allspans {
		malloc.go#L451: 	mheap_.init()
		malloc.go#L533: 			hintList := &mheap_.arenaHints
		malloc.go#L535: 				hintList = &mheap_.userArena.arenaHints
		malloc.go#L537: 			hint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
		malloc.go#L562: 			mheap_.heapArenaAlloc.init(meta, arenaMetaSize, true)
		malloc.go#L585: 		if mheap_.heapArenaAlloc.next <= p && p < mheap_.heapArenaAlloc.end {
		malloc.go#L586: 			p = mheap_.heapArenaAlloc.end
		malloc.go#L599: 				mheap_.arena.init(uintptr(a), size, false)
		malloc.go#L600: 				p = mheap_.arena.end // For hint below
		malloc.go#L604: 		hint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
		malloc.go#L606: 		hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
		malloc.go#L612: 		userArenaHint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
		malloc.go#L614: 		userArenaHint.next, mheap_.userArena.arenaHints = mheap_.userArena.arenaHints, userArenaHint
		malloc.go#L713: 		hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
		malloc.go#L716: 		hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
		mbitmap.go#L729: 				progSpan = mheap_.allocManual(npages, spanAllocPtrScalarBits)
		mbitmap.go#L1684: 	s := mheap_.allocManual(pages, spanAllocPtrScalarBits)
		mbitmap.go#L1689: 	mheap_.freeManual(s, spanAllocPtrScalarBits)
		mcache.go#L88: 		lock(&mheap_.lock)
		mcache.go#L89: 		c = (*mcache)(mheap_.cachealloc.alloc())
		mcache.go#L90: 		c.flushGen.Store(mheap_.sweepgen)
		mcache.go#L91: 		unlock(&mheap_.lock)
		mcache.go#L116: 		lock(&mheap_.lock)
		mcache.go#L117: 		mheap_.cachealloc.free(unsafe.Pointer(c))
		mcache.go#L118: 		unlock(&mheap_.lock)
		mcache.go#L156: 		if s.sweepgen != mheap_.sweepgen+3 {
		mcache.go#L159: 		mheap_.central[spc].mcentral.uncacheSpan(s)
		mcache.go#L182: 	s = mheap_.central[spc].mcentral.cacheSpan()
		mcache.go#L193: 	s.sweepgen = mheap_.sweepgen + 3
		mcache.go#L234: 	s := mheap_.alloc(npages, spc)
		mcache.go#L253: 	mheap_.central[spc].mcentral.fullSwept(mheap_.sweepgen).push(s)
		mcache.go#L264: 	sg := mheap_.sweepgen
		mcache.go#L291: 			mheap_.central[i].mcentral.uncacheSpan(s)
		mcache.go#L320: 	sg := mheap_.sweepgen
		mcache.go#L330: 	c.flushGen.Store(mheap_.sweepgen) // Synchronizes with gcStart
		mcentral.go#L112: 	sg := mheap_.sweepgen
		mcentral.go#L209: 	sg := mheap_.sweepgen
		mcentral.go#L254: 	s := mheap_.alloc(npages, c.spanclass)
		mcheckmark.go#L42: 	for _, ai := range mheap_.allArenas {
		mcheckmark.go#L43: 		arena := mheap_.arenas[ai.l1()][ai.l2()]
		mcheckmark.go#L90: 	arena := mheap_.arenas[ai.l1()][ai.l2()]
		metrics.go#L657: 		lock(&mheap_.lock)
		metrics.go#L659: 		a.mSpanInUse = uint64(mheap_.spanalloc.inuse)
		metrics.go#L661: 		a.mCacheInUse = uint64(mheap_.cachealloc.inuse)
		metrics.go#L662: 		unlock(&mheap_.lock)
		mgc.go#L691: 		if fg := p.mcache.flushGen.Load(); fg != mheap_.sweepgen {
		mgc.go#L692: 			println("runtime: p", p.id, "flushGen", fg, "!= sweepgen", mheap_.sweepgen)
		mgc.go#L1136: 	mheap_.pages.scav.index.nextGen()
		mgc.go#L1206: 				lock(&mheap_.lock)
		mgc.go#L1207: 				pp.pcache.flush(&mheap_.pages)
		mgc.go#L1208: 				unlock(&mheap_.lock)
		mgc.go#L1283: 			mheap_.enableMetadataHugePages()
		mgc.go#L1672: 	lock(&mheap_.lock)
		mgc.go#L1673: 	mheap_.sweepgen += 2
		mgc.go#L1675: 	mheap_.pagesSwept.Store(0)
		mgc.go#L1676: 	mheap_.sweepArenas = mheap_.allArenas
		mgc.go#L1677: 	mheap_.reclaimIndex.Store(0)
		mgc.go#L1678: 	mheap_.reclaimCredit.Store(0)
		mgc.go#L1679: 	unlock(&mheap_.lock)
		mgc.go#L1686: 		lock(&mheap_.lock)
		mgc.go#L1687: 		mheap_.sweepPagesPerByte = 0
		mgc.go#L1688: 		unlock(&mheap_.lock)
		mgc.go#L1738: 	lock(&mheap_.lock)
		mgc.go#L1739: 	arenas := mheap_.allArenas
		mgc.go#L1740: 	unlock(&mheap_.lock)
		mgc.go#L1742: 		ha := mheap_.arenas[ai.l1()][ai.l2()]
		mgc.go#L1907: 		lock(&mheap_.speciallock)
		mgc.go#L1908: 		s := (*specialReachable)(mheap_.specialReachableAlloc.alloc())
		mgc.go#L1909: 		unlock(&mheap_.speciallock)
		mgc.go#L1934: 		lock(&mheap_.speciallock)
		mgc.go#L1935: 		mheap_.specialReachableAlloc.free(unsafe.Pointer(s))
		mgc.go#L1936: 		unlock(&mheap_.speciallock)
		mgcmark.go#L94: 	mheap_.markArenas = mheap_.allArenas[:len(mheap_.allArenas):len(mheap_.allArenas)]
		mgcmark.go#L95: 	work.nSpanRoots = len(mheap_.markArenas) * (pagesPerArena / pagesPerSpanRoot)
		mgcmark.go#L338: 	sg := mheap_.sweepgen
		mgcmark.go#L341: 	ai := mheap_.markArenas[shard/(pagesPerArena/pagesPerSpanRoot)]
		mgcmark.go#L342: 	ha := mheap_.arenas[ai.l1()][ai.l2()]
		mgcpacer.go#L1185: 		assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcpacer.go#L1245: 		assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcpacer.go#L1262: 		lock(&mheap_.lock)
		mgcpacer.go#L1265: 		unlock(&mheap_.lock)
		mgcpacer.go#L1294: 		assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcpacer.go#L1309: 		lock(&mheap_.lock)
		mgcpacer.go#L1314: 			unlock(&mheap_.lock)
		mgcpacer.go#L1318: 		unlock(&mheap_.lock)
		mgcpacer.go#L1426: 	assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcscavenge.go#L168: 	assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcscavenge.go#L395: 			r := mheap_.pages.scavenge(n, nil, false)
		mgcscavenge.go#L661: 		mheap_.pages.scav.releasedBg.Add(released)
		mgcsweep.go#L153: 			return sweepLocker{mheap_.sweepgen, false}
		mgcsweep.go#L156: 			return sweepLocker{mheap_.sweepgen, true}
		mgcsweep.go#L164: 	if sl.sweepGen != mheap_.sweepgen {
		mgcsweep.go#L178: 				print("pacer: sweep done at heap size ", live>>20, "MB; allocated ", (live-mheap_.sweepHeapLiveBasis)>>20, "MB during sweep; swept ", mheap_.pagesSwept.Load(), " pages at ", mheap_.sweepPagesPerByte, " pages/byte\n")
		mgcsweep.go#L253: 	sg := mheap_.sweepgen
		mgcsweep.go#L254: 	for i := range mheap_.central {
		mgcsweep.go#L255: 		c := &mheap_.central[i].mcentral
		mgcsweep.go#L371: 		s := mheap_.nextSpanForSweep()
		mgcsweep.go#L393: 				mheap_.reclaimCredit.Add(npages)
		mgcsweep.go#L423: 				lock(&mheap_.lock)
		mgcsweep.go#L426: 				releasedBg := mheap_.pages.scav.releasedBg.Load()
		mgcsweep.go#L427: 				releasedEager := mheap_.pages.scav.releasedEager.Load()
		mgcsweep.go#L433: 				mheap_.pages.scav.releasedBg.Add(-releasedBg)
		mgcsweep.go#L434: 				mheap_.pages.scav.releasedEager.Add(-releasedEager)
		mgcsweep.go#L435: 				unlock(&mheap_.lock)
		mgcsweep.go#L513: 	sweepgen := mheap_.sweepgen
		mgcsweep.go#L525: 	mheap_.pagesSwept.Add(int64(s.npages))
		mgcsweep.go#L725: 			mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
		mgcsweep.go#L731: 		mheap_.pagesInUse.Add(-s.npages)
		mgcsweep.go#L739: 			if s.list != &mheap_.userArena.quarantineList {
		mgcsweep.go#L742: 			lock(&mheap_.lock)
		mgcsweep.go#L743: 			mheap_.userArena.quarantineList.remove(s)
		mgcsweep.go#L744: 			mheap_.userArena.readyList.insert(s)
		mgcsweep.go#L745: 			unlock(&mheap_.lock)
		mgcsweep.go#L773: 				mheap_.freeSpan(s)
		mgcsweep.go#L778: 				mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
		mgcsweep.go#L780: 				mheap_.central[spc].mcentral.partialSwept(sweepgen).push(s)
		mgcsweep.go#L819: 				mheap_.freeSpan(s)
		mgcsweep.go#L826: 					mheap_.freeManual(s, spanAllocPtrScalarBits)
		mgcsweep.go#L837: 		mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
		mgcsweep.go#L911: 	if mheap_.sweepPagesPerByte == 0 {
		mgcsweep.go#L924: 	sweptBasis := mheap_.pagesSweptBasis.Load()
		mgcsweep.go#L926: 	liveBasis := mheap_.sweepHeapLiveBasis
		mgcsweep.go#L946: 	pagesTarget := int64(mheap_.sweepPagesPerByte*float64(newHeapLive)) - int64(callerSweepPages)
		mgcsweep.go#L947: 	for pagesTarget > int64(mheap_.pagesSwept.Load()-sweptBasis) {
		mgcsweep.go#L949: 			mheap_.sweepPagesPerByte = 0
		mgcsweep.go#L952: 		if mheap_.pagesSweptBasis.Load() != sweptBasis {
		mgcsweep.go#L980: 	assertWorldStoppedOrLockHeld(&mheap_.lock)
		mgcsweep.go#L984: 		mheap_.sweepPagesPerByte = 0
		mgcsweep.go#L1001: 		pagesSwept := mheap_.pagesSwept.Load()
		mgcsweep.go#L1002: 		pagesInUse := mheap_.pagesInUse.Load()
		mgcsweep.go#L1005: 			mheap_.sweepPagesPerByte = 0
		mgcsweep.go#L1007: 			mheap_.sweepPagesPerByte = float64(sweepDistancePages) / float64(heapDistance)
		mgcsweep.go#L1008: 			mheap_.sweepHeapLiveBasis = heapLiveBasis
		mgcsweep.go#L1012: 			mheap_.pagesSweptBasis.Store(pagesSwept)
		mgcwork.go#L119: 	lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
		mgcwork.go#L363: 	lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
		mgcwork.go#L378: 				s = mheap_.allocManual(workbufAlloc/pageSize, spanAllocWorkBuf)
		mgcwork.go#L483: 			mheap_.freeManual(span, spanAllocWorkBuf)
		mheap.go#L236: var mheap_ mheap
		mheap.go#L678: 		if ri.l2() >= uint(len(mheap_.arenas[0])) {
		mheap.go#L683: 		if ri.l1() >= uint(len(mheap_.arenas)) {
		mheap.go#L687: 	l2 := mheap_.arenas[ri.l1()]
		mheap.go#L706: 	return mheap_.arenas[ai.l1()][ai.l2()].spans[(p/pageSize)%pagesPerArena]
		mheap.go#L732: 	arena = mheap_.arenas[ai.l1()][ai.l2()]
		mheap.go#L1327: 		mheap_.pages.scav.releasedEager.Add(released)
		mheap.go#L1693: 	systemstack(func() { mheap_.scavengeAll() })
		mheap.go#L1840: 	ha := mheap_.arenas[ai.l1()][ai.l2()]
		mheap.go#L1848: 	ha := mheap_.arenas[ai.l1()][ai.l2()]
		mheap.go#L1963: 	lock(&mheap_.speciallock)
		mheap.go#L1964: 	s := (*specialfinalizer)(mheap_.specialfinalizeralloc.alloc())
		mheap.go#L1965: 	unlock(&mheap_.speciallock)
		mheap.go#L1994: 	lock(&mheap_.speciallock)
		mheap.go#L1995: 	mheap_.specialfinalizeralloc.free(unsafe.Pointer(s))
		mheap.go#L1996: 	unlock(&mheap_.speciallock)
		mheap.go#L2006: 	lock(&mheap_.speciallock)
		mheap.go#L2007: 	mheap_.specialfinalizeralloc.free(unsafe.Pointer(s))
		mheap.go#L2008: 	unlock(&mheap_.speciallock)
		mheap.go#L2151: 	lock(&mheap_.speciallock)
		mheap.go#L2152: 	s := (*specialWeakHandle)(mheap_.specialWeakHandleAlloc.alloc())
		mheap.go#L2153: 	unlock(&mheap_.speciallock)
		mheap.go#L2191: 	lock(&mheap_.speciallock)
		mheap.go#L2192: 	mheap_.specialWeakHandleAlloc.free(unsafe.Pointer(s))
		mheap.go#L2193: 	unlock(&mheap_.speciallock)
		mheap.go#L2249: 	lock(&mheap_.speciallock)
		mheap.go#L2250: 	s := (*specialprofile)(mheap_.specialprofilealloc.alloc())
		mheap.go#L2251: 	unlock(&mheap_.speciallock)
		mheap.go#L2308: 		lock(&mheap_.speciallock)
		mheap.go#L2309: 		mheap_.specialfinalizeralloc.free(unsafe.Pointer(sf))
		mheap.go#L2310: 		unlock(&mheap_.speciallock)
		mheap.go#L2314: 		lock(&mheap_.speciallock)
		mheap.go#L2315: 		mheap_.specialWeakHandleAlloc.free(unsafe.Pointer(s))
		mheap.go#L2316: 		unlock(&mheap_.speciallock)
		mheap.go#L2320: 		lock(&mheap_.speciallock)
		mheap.go#L2321: 		mheap_.specialprofilealloc.free(unsafe.Pointer(sp))
		mheap.go#L2322: 		unlock(&mheap_.speciallock)
		mheap.go#L2328: 		lock(&mheap_.speciallock)
		mheap.go#L2329: 		mheap_.specialPinCounterAlloc.free(unsafe.Pointer(s))
		mheap.go#L2330: 		unlock(&mheap_.speciallock)
		mpagealloc.go#L452: 	lock(&mheap_.lock)
		mpagealloc.go#L454: 		unlock(&mheap_.lock)
		mpagealloc.go#L461: 	unlock(&mheap_.lock)
		mpagealloc.go#L617: 	if p.test || mheap_.arenas[ai.l1()] == nil || mheap_.arenas[ai.l1()][ai.l2()] == nil {
		mstats.go#L537: 	stats.MSpanInuse = uint64(mheap_.spanalloc.inuse)
		mstats.go#L539: 	stats.MCacheInuse = uint64(mheap_.cachealloc.inuse)
		mstats.go#L583: 	lock(&mheap_.lock)
		mstats.go#L604: 	unlock(&mheap_.lock)
		panic.go#L1322: 	if mheap_.cachealloc.size == 0 { // very early
		pinner.go#L330: 		lock(&mheap_.speciallock)
		pinner.go#L331: 		rec = (*specialPinCounter)(mheap_.specialPinCounterAlloc.alloc())
		pinner.go#L332: 		unlock(&mheap_.speciallock)
		pinner.go#L359: 		lock(&mheap_.speciallock)
		pinner.go#L360: 		mheap_.specialPinCounterAlloc.free(unsafe.Pointer(counter))
		pinner.go#L361: 		unlock(&mheap_.speciallock)
		proc.go#L5643: 			mheap_.spanalloc.free(unsafe.Pointer(pp.mspancache.buf[i]))
		proc.go#L5646: 		lock(&mheap_.lock)
		proc.go#L5647: 		pp.pcache.flush(&mheap_.pages)
		proc.go#L5648: 		unlock(&mheap_.lock)
		stack.go#L196: 	lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
		stack.go#L199: 		s = mheap_.allocManual(_StackCacheSize>>_PageShift, spanAllocStack)
		stack.go#L263: 		mheap_.freeManual(s, spanAllocStack)
		stack.go#L404: 		lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
		stack.go#L408: 			s = mheap_.allocManual(npage, spanAllocStack)
		stack.go#L512: 			mheap_.freeManual(s, spanAllocStack)
		stack.go#L1262: 				mheap_.freeManual(s, spanAllocStack)
		stack.go#L1276: 			mheap_.freeManual(s, spanAllocStack)
		trace.go#L232: 	trace.minPageHeapAddr = uint64(mheap_.pages.inUse.ranges[0].base.addr())
		traceallocfree.go#L53: 	for _, s := range mheap_.allspans {
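
A large share of the sites listed above follow the same shape: take one of mheap_'s locks (mheap_.lock or mheap_.speciallock), carve an object out of a fixed-size allocator hanging off the heap, stamp it with heap state such as sweepgen, and unlock (see, for example, mcache.go#L88-L91 and mheap.go#L1963-L1965). The sketch below illustrates that lock/alloc/unlock pattern only; it is not runtime code. The names heap, fixedPool, record, allocRecord, and freeRecord are hypothetical stand-ins for mheap, fixalloc, and the mcache/special records, and fixalloc is simplified to a plain free list.

	// Illustrative sketch of the lock/alloc/unlock pattern around mheap_'s
	// fixed allocators. All identifiers here are invented for the example.
	package main

	import (
		"fmt"
		"sync"
	)

	// record stands in for a structure the runtime carves out of a fixed-size
	// pool (an mcache, a specialfinalizer, a specialprofile, ...).
	type record struct {
		gen uint32
	}

	// fixedPool stands in for fixalloc: a simple free-list allocator that must
	// only be used with the owning lock held.
	type fixedPool struct {
		free []*record
	}

	func (p *fixedPool) alloc() *record {
		if n := len(p.free); n > 0 {
			r := p.free[n-1]
			p.free = p.free[:n-1]
			return r
		}
		return new(record)
	}

	func (p *fixedPool) release(r *record) { p.free = append(p.free, r) }

	// heap stands in for mheap: a single global whose lock guards its pools.
	type heap struct {
		lock     sync.Mutex
		sweepgen uint32
		pool     fixedPool
	}

	var heap_ heap // analogous to `var mheap_ mheap` (mheap.go#L236)

	// allocRecord mirrors the shape of allocmcache (mcache.go#L88-L91): lock,
	// allocate from the pool, snapshot heap state into the record, unlock.
	func allocRecord() *record {
		heap_.lock.Lock()
		r := heap_.pool.alloc()
		r.gen = heap_.sweepgen
		heap_.lock.Unlock()
		return r
	}

	// freeRecord mirrors freemcache (mcache.go#L116-L118): return the record
	// to the pool under the same lock.
	func freeRecord(r *record) {
		heap_.lock.Lock()
		heap_.pool.release(r)
		heap_.lock.Unlock()
	}

	func main() {
		heap_.sweepgen = 2
		r := allocRecord()
		fmt.Println("allocated record at gen", r.gen)
		freeRecord(r)
	}

The point of the pattern is that the pools themselves carry no synchronization; correctness comes entirely from holding the appropriate mheap_ lock across the alloc or free, which is why the listing shows the lock and unlock calls immediately bracketing every pool operation.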